runtime.mspan.base (method)
74 uses
runtime (current package)
arena.go#L495: userArenaHeapBitsSetSliceType(typ, cap, ptr, s.base())
arena.go#L497: userArenaHeapBitsSetType(typ, ptr, s.base())
arena.go#L666: x := unsafe.Pointer(span.base())
arena.go#L673: gcmarknewobject(span, span.base(), span.elemsize)
arena.go#L678: racemalloc(unsafe.Pointer(span.base()), span.elemsize)
arena.go#L683: msanmalloc(unsafe.Pointer(span.base()), span.elemsize)
arena.go#L691: span.userArenaChunkFree = makeAddrRange(span.base(), span.limit)
arena.go#L693: asanunpoison(unsafe.Pointer(span.base()), span.elemsize)
arena.go#L705: profilealloc(mp, unsafe.Pointer(span.base()), userArenaChunkBytes)
arena.go#L718: tracealloc(unsafe.Pointer(span.base()), userArenaChunkBytes, nil)
arena.go#L782: sysFault(unsafe.Pointer(s.base()), s.npages*pageSize)
arena.go#L811: racefree(unsafe.Pointer(s.base()), s.elemsize)
arena.go#L849: racefree(unsafe.Pointer(s.base()), s.elemsize)
arena.go#L852: msanfree(unsafe.Pointer(s.base()), s.elemsize)
arena.go#L855: asanpoison(unsafe.Pointer(s.base()), s.elemsize)
arena.go#L905: base = s.base()
arena.go#L978: s.limit = s.base() + userArenaChunkBytes
arena.go#L995: memclrNoHeapPointers(unsafe.Pointer(s.base()), s.elemsize)
heapdump.go#L462: p := unsafe.Pointer(s.base() + uintptr(spf.special.offset))
heapdump.go#L484: p := s.base()
heapdump.go#L661: p := s.base() + uintptr(spp.special.offset)
malloc.go#L904: return gclinkptr(result*s.elemsize + s.base())
malloc.go#L940: v = gclinkptr(freeIndex*s.elemsize + s.base())
malloc.go#L1131: x = unsafe.Pointer(span.base())
mbitmap.go#L226: return s.divideByElemSize(p - s.base())
mbitmap.go#L313: print(" span.base()=", hex(s.base()), " span.limit=", hex(s.limit), " span.state=", state)
mbitmap.go#L356: if state := s.state.get(); state != mSpanInUse || p < s.base() || p >= s.limit {
mbitmap.go#L370: base = s.base() + objIndex*s.elemsize
mbitmap.go#L564: } else if s.state.get() != mSpanInUse || dst < s.base() || s.limit <= dst {
mbitmap.go#L731: base := s.base()
mbitmap.go#L741: h := writeHeapBitsForAddr(s.base())
mbitmap.go#L747: h.flush(s.base(), size)
mcache.go#L254: s.limit = s.base() + size
mcentral.go#L254: s.limit = s.base() + size*n
mgcmark.go#L385: p := s.base() + uintptr(spf.special.offset)/s.elemsize*s.elemsize
mgcmark.go#L1282: if b == s.base() {
mgcmark.go#L1288: for oblet := b + maxObletBytes; oblet < s.base()+s.elemsize; oblet += maxObletBytes {
mgcmark.go#L1298: n = s.base() + s.elemsize - b
mgcmark.go#L1434: obj := span.base() + idx*span.elemsize
mgcmark.go#L1486: arena, pageIdx, pageMask := pageIndexOf(span.base())
mgcmark.go#L1519: print(" s.base()=", hex(s.base()), " s.limit=", hex(s.limit), " s.spanclass=", s.spanclass, " s.elemsize=", s.elemsize, " s.state=")
mgcmark.go#L1574: arena, pageIdx, pageMask := pageIndexOf(span.base())
mgcsweep.go#L553: p := s.base() + objIndex*size
mgcsweep.go#L559: endOffset := p - s.base() + size
mgcsweep.go#L573: p := s.base() + uintptr(special.offset)
mgcsweep.go#L607: x := s.base() + i*s.elemsize
mgcsweep.go#L790: sysFault(unsafe.Pointer(s.base()), size)
mgcsweep.go#L833: addr := s.base() + i*s.elemsize
mgcwork.go#L391: newb := (*workbuf)(unsafe.Pointer(s.base() + i))
mheap.go#L506: func (s *mspan) base() uintptr {
mheap.go#L660: if s == nil || b < s.base() {
mheap.go#L735: if s == nil || s.state.get() != mSpanInUse || p < s.base() || p >= s.limit {
mheap.go#L1392: s.limit = s.base() + s.npages*pageSize
mheap.go#L1440: h.setSpans(s.base(), npages, s)
mheap.go#L1448: arena, pageIdx, pageMask := pageIndexOf(s.base())
mheap.go#L1551: pageTraceFree(getg().m.p.ptr(), 0, s.base(), s.npages)
mheap.go#L1556: base := unsafe.Pointer(s.base())
mheap.go#L1562: base := unsafe.Pointer(s.base())
mheap.go#L1583: pageTraceFree(getg().m.p.ptr(), 0, s.base(), s.npages)
mheap.go#L1604: print("mheap.freeSpanLocked - span ", s, " ptr ", hex(s.base()), " allocCount ", s.allocCount, " sweepgen ", s.sweepgen, "/", h.sweepgen, "\n")
mheap.go#L1610: arena, pageIdx, pageMask := pageIndexOf(s.base())
mheap.go#L1639: h.pages.free(s.base(), s.npages)
mheap.go#L1818: arenaPage := (s.base() / pageSize) % pagesPerArena
mheap.go#L1819: ai := arenaIndex(s.base())
mheap.go#L1826: arenaPage := (s.base() / pageSize) % pagesPerArena
mheap.go#L1827: ai := arenaIndex(s.base())
mheap.go#L1850: offset := uintptr(p) - span.base()
mheap.go#L1885: offset := uintptr(p) - span.base()
mwbbuf.go#L253: arena, pageIdx, pageMask := pageIndexOf(span.base())
signal_unix.go#L410: if s != nil && s.state.get() == mSpanManual && s.base() < sp && sp < s.limit {
signal_unix.go#L411: gp := *(**g)(unsafe.Pointer(s.base()))
stack.go#L212: x := gclinkptr(s.base() + i)
stack.go#L415: v = unsafe.Pointer(s.base())
stack.go#L491: println(hex(s.base()), v)
This page was generated with Golds v0.6.7. (GOOS=linux GOARCH=amd64)
Golds is a Go 101 project developed by Tapir Liu.
PRs and bug reports are welcome and can be submitted to the issue list.
Please follow @Go100and1 (reachable via the QR code on the left) to get the latest news about Golds.